# Source Generated with Decompyle++
# File: in.pyc (Python 2.6)
from base.g import *
from base import utils
# The decompiler emitted "import os.path as os", which would bind the
# posixpath submodule to the name "os" and break os.path.join()/os.getcwd();
# the original bytecode's "import os.path" binds the os package itself.
import os.path
import re
import glob
-
- try:
- import datetime
- datetime_avail = True
- except ImportError:
- datetime_avail = False
- datetime = None
-
- pat_prod_num = re.compile('(\\d+)', re.I)
- TYPE_UNKNOWN = 0
- TYPE_STRING = 1
- TYPE_STR = 1
- TYPE_LIST = 2
- TYPE_BOOL = 3
- TYPE_INT = 4
- TYPE_HEX = 5
- TYPE_BITFIELD = 6
- TYPE_URI = TYPE_STR
- TYPE_DATE = 8
- TECH_CLASSES = [
- 'Undefined',
- 'Unsupported',
- 'Postscript',
- 'DJGenericVIP',
- 'LJMono',
- 'LJColor',
- 'LJFastRaster',
- 'LJJetReady',
- 'DJ350',
- 'DJ540',
- 'DJ600',
- 'DJ6xx',
- 'DJ6xxPhoto',
- 'DJ630',
- 'DJ8xx',
- 'DJ8x5',
- 'DJ850',
- 'DJ890',
- 'DJ9xx',
- 'DJ9xxVIP',
- 'DJ3600',
- 'DJ3320',
- 'DJ4100',
- 'AP2xxx',
- 'AP21xx',
- 'AP2560',
- 'PSP100',
- 'PSP470',
- 'LJZjsMono',
- 'LJZjsColor',
- 'LJm1005',
- 'QuickConnect',
- 'DJ55xx',
- 'OJProKx50',
- 'LJP1XXX',
- 'DJD2600']
- TECH_CLASSES.sort()
- TECH_CLASS_PDLS = {
- 'Postscript': 'ps',
- 'DJGenericVIP': 'pcl3',
- 'LJMono': 'pcl3',
- 'LJColor': 'pcl3',
- 'LJFastRaster': 'pclxl',
- 'LJJetReady': 'pclxl',
- 'DJ350': 'pcl3',
- 'DJ540': 'pcl3',
- 'DJ600': 'pcl3',
- 'DJ6xx': 'pcl3',
- 'DJ6xxPhoto': 'pcl3',
- 'DJ630': 'pcl3',
- 'DJ8xx': 'pcl3',
- 'DJ8x5': 'pcl3',
- 'DJ850': 'pcl3',
- 'DJ890': 'pcl3',
- 'DJ9xx': 'pcl3',
- 'DJ9xxVIP': 'pcl3',
- 'DJ3600': 'lidil',
- 'DJ3320': 'lidil',
- 'DJ4100': 'lidil',
- 'AP2xxx': 'pcl3',
- 'AP21xx': 'pcl3',
- 'AP2560': 'pcl3',
- 'PSP100': 'pcl3',
- 'PSP470': 'pcl3',
- 'LJZjsMono': 'zjs',
- 'LJZjsColor': 'zjs',
- 'LJm1005': 'zxs',
- 'QuickConnect': 'jpeg',
- 'DJ55xx': 'pcl3',
- 'OJProKx50': 'pcl3',
- 'LJP1XXX': 'zxs' }
- PDL_TYPE_PCL = 0
- PDL_TYPE_PS = 1
- PDL_TYPE_HOST = 2
- PDL_TYPES = {
- 'pcl3': PDL_TYPE_PCL,
- 'pcl5': PDL_TYPE_PCL,
- 'pcl6': PDL_TYPE_PCL,
- 'pcl5e': PDL_TYPE_PCL,
- 'pcl': PDL_TYPE_PCL,
- 'pclxl': PDL_TYPE_PCL,
- 'ps': PDL_TYPE_PS,
- 'lidil': PDL_TYPE_HOST,
- 'zjs': PDL_TYPE_HOST,
- 'zjstream': PDL_TYPE_HOST,
- 'zxs': PDL_TYPE_HOST,
- 'zxstream': PDL_TYPE_HOST,
- 'jpeg': PDL_TYPE_HOST,
- 'jpg': PDL_TYPE_HOST,
- 'jetready': PDL_TYPE_HOST,
- 'jr': PDL_TYPE_HOST }
- TECH_SUBCLASSES = [
- 'LargeFormatSuperB',
- 'LargeFormatA3',
- 'CoverMedia',
- 'FullBleed',
- 'Duplex',
- 'Normal',
- 'Apollo2000',
- 'Apollo2200',
- 'Apollo2500',
- 'NoPhotoMode',
- 'NoPhotoBestHiresModes',
- 'No1200dpiNoSensor',
- 'NoFullBleed',
- '4x6FullBleed',
- '300dpiOnly',
- 'GrayscaleOnly',
- 'NoAutoTray',
- 'NoEvenDuplex']
- TECH_SUBCLASSES.sort()
- MODEL_UI_REPLACEMENTS = {
- 'laserjet': 'LaserJet',
- 'psc': 'PSC',
- 'hp': 'HP',
- 'mfp': 'MFP' }
-
- def normalizeModelUIName(model):
- ml = model.lower().strip()
- if 'apollo' in ml:
- z = ml.replace('_', ' ')
- elif ml.startswith('hp'):
- z = ml[3:].replace('_', ' ')
- else:
- z = ml.replace('_', ' ')
- y = []
- for x in z.split():
- if pat_prod_num.search(x):
- y.append(x)
- continue
- y.append(MODEL_UI_REPLACEMENTS.get(x, x.capitalize()))
-
- if 'apollo' in ml:
- return ' '.join(y)
- return 'HP ' + ' '.join(y)
-
-
- def normalizeModelName(model):
- return utils.xstrip(model.replace(' ', '_').replace('__', '_').replace('~', '').replace('/', '_'), '_')
-
-
- class ModelData:
-
- def __init__(self, root_path = None):
- if root_path is None:
- self.root_path = prop.models_dir
- else:
- self.root_path = root_path
- self._ModelData__cache = { }
- self.reset_includes()
- self.sec = re.compile('^\\[(.*)\\]')
- self.inc = re.compile('^\\%include (.*)', re.I)
- self.inc_line = re.compile('^\\%(.*)\\%')
- self.eq = re.compile('^([^=]+)=(.*)')
- self.date = re.compile('^(\\d{1,2})/(\\d{1,2})/(\\d{4,4})')
- files = [
- (os.path.join(self.root_path, 'models.dat'), os.path.join(self.root_path, 'unreleased', 'unreleased.dat')),
- (os.path.join(os.getcwd(), 'data', 'models', 'models.dat'), os.path.join(os.getcwd(), 'data', 'models', 'unreleased', 'unreleased.dat'))]
- for None in files:
- (self.released_dat, self.unreleased_dat) = None
- if os.path.exists(self.released_dat):
- break
- continue
- else:
- (self.released_dat, self.unreleased_dat) = (None, None)
- self.FIELD_TYPES = {
- 'align-type': TYPE_INT,
- 'clean-type': TYPE_INT,
- 'color-cal-type': TYPE_INT,
- 'copy-type': TYPE_INT,
- 'embedded-server-type': TYPE_INT,
- 'fax-type': TYPE_INT,
- 'fw-download': TYPE_BOOL,
- 'icon': TYPE_STR,
- 'io-mfp-mode': TYPE_INT,
- 'io-mode': TYPE_INT,
- 'io-support': TYPE_BITFIELD,
- 'job-storage': TYPE_INT,
- 'monitor-type': TYPE_INT,
- 'linefeed-cal-type': TYPE_INT,
- 'panel-check-type': TYPE_INT,
- 'pcard-type': TYPE_INT,
- 'plugin': TYPE_INT,
- 'plugin-reason': TYPE_BITFIELD,
- 'power-settings': TYPE_INT,
- 'pq-diag-type': TYPE_INT,
- 'r-type': TYPE_INT,
- 'scan-style': TYPE_INT,
- 'scan-type': TYPE_INT,
- 'status-battery-check': TYPE_INT,
- 'status-dynamic-counters': TYPE_INT,
- 'status-type': TYPE_INT,
- 'support-subtype': TYPE_HEX,
- 'support-released': TYPE_BOOL,
- 'support-type': TYPE_INT,
- 'support-ver': TYPE_STR,
- 'tech-class': TYPE_LIST,
- 'tech-subclass': TYPE_LIST,
- 'tech-type': TYPE_INT,
- 'usb-pid': TYPE_HEX,
- 'usb-vid': TYPE_HEX,
- 'wifi-config': TYPE_INT }
- self.FIELD_TYPES_DYN = {
- 'dev-file': TYPE_STR,
- 'fax-uri': TYPE_STR,
- 'scan-uri': TYPE_STR,
- 'is-hp': TYPE_BOOL,
- 'host': TYPE_STR,
- 'status-desc': TYPE_STR,
- 'cups-printers': TYPE_STR,
- 'serial': TYPE_STR,
- 'error-state': TYPE_INT,
- 'device-state': TYPE_INT,
- 'panel': TYPE_INT,
- 'device-uri': TYPE_STR,
- 'panel-line1': TYPE_STR,
- 'panel-line2': TYPE_STR,
- 'back-end': TYPE_STR,
- 'port': TYPE_INT,
- 'deviceid': TYPE_STR,
- 'cups-uri': TYPE_STR,
- 'status-code': TYPE_INT,
- 'rs': TYPE_STR,
- 'rr': TYPE_STR,
- 'rg': TYPE_STR,
- 'r': TYPE_INT,
- 'duplexer': TYPE_INT,
- 'supply-door': TYPE_INT,
- 'revision': TYPE_INT,
- 'media-path': TYPE_INT,
- 'top-door': TYPE_BOOL,
- 'photo-tray': TYPE_BOOL }
- self.RE_FIELD_TYPES = {
- re.compile('^r(\\d+)-agent(\\d+)-kind', re.IGNORECASE): TYPE_INT,
- re.compile('^r(\\d+)-agent(\\d+)-type', re.IGNORECASE): TYPE_INT,
- re.compile('^r(\\d+)-agent(\\d+)-sku', re.IGNORECASE): TYPE_STR,
- re.compile('^agent(\\d+)-desc', re.IGNORECASE): TYPE_STR,
- re.compile('^agent(\\d+)-virgin', re.IGNORECASE): TYPE_BOOL,
- re.compile('^agent(\\d+)-dvc', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-kind', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-type', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-id', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-hp-ink', re.IGNORECASE): TYPE_BOOL,
- re.compile('^agent(\\d+)-health-desc', re.IGNORECASE): TYPE_STR,
- re.compile('^agent(\\d+)-health$', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-known', re.IGNORECASE): TYPE_BOOL,
- re.compile('^agent(\\d+)-level', re.IGNORECASE): TYPE_INT,
- re.compile('^agent(\\d+)-ack', re.IGNORECASE): TYPE_BOOL,
- re.compile('^agent(\\d+)-sku', re.IGNORECASE): TYPE_STR,
- re.compile('^in-tray(\\d+)', re.IGNORECASE): TYPE_BOOL,
- re.compile('^out-tray(\\d+)', re.IGNORECASE): TYPE_BOOL,
- re.compile('^model(\\d+)', re.IGNORECASE): TYPE_STR }
- self.TYPE_CACHE = { }
-
-
- def read_all_files(self, unreleased = True):
- if os.path.exists(self.released_dat):
- self.read_section(self.released_dat)
- if self.unreleased_dat is not None and os.path.exists(self.unreleased_dat):
- self.read_section(self.unreleased_dat)
-
-
- return self._ModelData__cache
-
-
- def read_section(self, filename, section = None, is_include = False):
- found = False
- in_section = False
- if section is not None:
- section = section.lower()
- if is_include:
- log.debug('Searching for include [%s] in file %s' % (section, filename))
- else:
- log.debug('Searching for section [%s] in file %s' % (section, filename))
-
- if is_include:
- cache = self._ModelData__includes
- else:
- cache = self._ModelData__cache
-
- try:
- fd = file(filename)
- except IOError:
- e = None
- log.error('I/O Error: %s (%s)' % (filename, e.strerror))
- return False
-
- while True:
- line = fd.readline()
- if not line:
- break
-
- if line[0] in ('#', ';'):
- continue
-
- if line[0] == '[':
- if in_section and section is not None:
- break
-
- match = self.sec.search(line)
- if match is not None:
- in_section = True
- read_section = match.group(1).lower()
- if section is not None:
- found = in_section = read_section == section
-
- if in_section:
- if section is not None:
- log.debug('Found section [%s] in file %s' % (read_section, filename))
-
- cache[read_section] = { }
-
- in_section
- continue
-
- if line[0] == '%':
- match = self.inc.match(line)
- if match is not None:
- inc_file = match.group(1)
- log.debug('Found include file directive: %%include %s' % inc_file)
- self._ModelData__include_files.append(os.path.join(os.path.dirname(filename), inc_file))
- continue
-
- if in_section:
- match = self.inc_line.match(line)
- if match is not None:
- inc_sect = match.group(1)
- log.debug('Found include directive %%%s%%' % inc_sect)
-
- try:
- self._ModelData__includes[inc_sect]
- except KeyError:
- for inc in self._ModelData__include_files:
- if self.read_section(inc, inc_sect, True):
- break
- continue
-
-
-
- None<EXCEPTION MATCH>KeyError
-
-
- if in_section:
- match = self.eq.search(line)
- if match is not None:
- key = match.group(1)
- value = match.group(2)
- value = self.convert_data(key, value)
- cache[read_section][key] = value
-
- match is not None
- fd.close()
- return found
-
-
- def reset_includes(self):
- self._ModelData__include_files = []
- self._ModelData__includes = { }
-
-
- def __getitem__(self, model):
- model = model.lower()
-
- try:
- return self._ModelData__cache[model]
- except:
- log.debug('Cache miss: %s' % model)
- log.debug('Reading file: %s' % self.released_dat)
- if self.read_section(self.released_dat, model):
- return self._ModelData__cache[model]
- return { }
-
-
-
- def all_models(self):
- return self._ModelData__cache
-
-
- def get_data_type(self, key):
-
- try:
- return self.FIELD_TYPES[key]
- except KeyError:
-
- try:
- return self.FIELD_TYPES_DYN[key]
- except KeyError:
-
- try:
- return self.TYPE_CACHE[key]
- except KeyError:
- for pat, typ in self.RE_FIELD_TYPES.items():
- match = pat.match(key)
- if match is not None:
- self.TYPE_CACHE[key] = typ
- return typ
-
- except:
- match is not None
-
-
- None<EXCEPTION MATCH>KeyError
-
-
- None<EXCEPTION MATCH>KeyError
-
- log.error('get_data_type(): Field type lookup failed for key %s' % key)
-
-
- def convert_data(self, key, value, typ = None):
- if typ is None:
- typ = self.get_data_type(key)
-
- if typ in (TYPE_BITFIELD, TYPE_INT):
-
- try:
- value = int(value)
- except (ValueError, TypeError):
- log.error('Invalid value in .dat file: %s=%s' % (key, value))
- value = 0
- except:
- None<EXCEPTION MATCH>(ValueError, TypeError)
-
-
- None<EXCEPTION MATCH>(ValueError, TypeError)
- if typ == TYPE_BOOL:
- value = utils.to_bool(value)
- elif typ == TYPE_LIST:
- value = _[1]
- elif typ == TYPE_DATE:
- if datetime_avail:
- match = self.date.search(value)
- if match is not None:
- month = int(match.group(1))
- day = int(match.group(2))
- year = int(match.group(3))
- value = datetime.date(year, month, day)
-
-
- elif typ == TYPE_HEX:
-
- try:
- value = int(value, 16)
- except (ValueError, TypeError):
- log.error('Invalid hex value in .dat file: %s=%s' % (key, value))
- value = 0
- except:
- None<EXCEPTION MATCH>(ValueError, TypeError)
-
-
- None<EXCEPTION MATCH>(ValueError, TypeError)
- return value
-
-
-